From 8f4fc2d2bc91349d5c63470d77289ded7a5be323 Mon Sep 17 00:00:00 2001 From: David Vrabel Date: Mon, 13 Jul 2015 11:50:51 +0200 Subject: [PATCH] x86: reintroduce read_unlock() optimization Commit 902d1b5c310fb63b511f0b967cf5f32d3f605f3d (x86,arm: remove asm/spinlock.h from all architectures) inadvertently removed an x86-specific optimization for read_unlock*(). Re-add asm/spinlock.h to allow architectures to provide an optimized _raw_read_unlock() and make x86 provide the previous implementation. Reported-by: Jan Beulich Signed-off-by: David Vrabel --- xen/include/asm-arm/spinlock.h | 6 ++++++ xen/include/asm-x86/spinlock.h | 7 +++++++ xen/include/xen/spinlock.h | 1 + 3 files changed, 14 insertions(+) create mode 100644 xen/include/asm-arm/spinlock.h create mode 100644 xen/include/asm-x86/spinlock.h diff --git a/xen/include/asm-arm/spinlock.h b/xen/include/asm-arm/spinlock.h new file mode 100644 index 0000000000..81955d1697 --- /dev/null +++ b/xen/include/asm-arm/spinlock.h @@ -0,0 +1,6 @@ +#ifndef __ASM_SPINLOCK_H +#define __ASM_SPINLOCK_H + +/* Nothing ARM specific. */ + +#endif /* __ASM_SPINLOCK_H */ diff --git a/xen/include/asm-x86/spinlock.h b/xen/include/asm-x86/spinlock.h new file mode 100644 index 0000000000..7d69e75a93 --- /dev/null +++ b/xen/include/asm-x86/spinlock.h @@ -0,0 +1,7 @@ +#ifndef __ASM_SPINLOCK_H +#define __ASM_SPINLOCK_H + +#define _raw_read_unlock(l) \ + asm volatile ( "lock; dec%z0 %0" : "+m" ((l)->lock) :: "memory" ) + +#endif /* __ASM_SPINLOCK_H */ diff --git a/xen/include/xen/spinlock.h b/xen/include/xen/spinlock.h index 9286543bea..fb0438e542 100644 --- a/xen/include/xen/spinlock.h +++ b/xen/include/xen/spinlock.h @@ -2,6 +2,7 @@ #define __SPINLOCK_H__ #include +#include #ifndef NDEBUG struct lock_debug { -- 2.30.2